In [1]:
import glob
import math
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import numpy as np
import random
import sklearn.metrics as metrics

from tensorflow.keras import optimizers
from tensorflow.keras.callbacks import ModelCheckpoint, CSVLogger, LearningRateScheduler
from tensorflow.keras.models import Model
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import add, concatenate, Conv2D, Dense, Dropout, Flatten, Input
from tensorflow.keras.layers import Activation, AveragePooling2D, BatchNormalization, MaxPooling2D
from tensorflow.keras.regularizers import l2
from tensorflow.keras.utils import to_categorical


%matplotlib inline
In [2]:
                            # Set up 'ggplot' style
plt.style.use('ggplot')     # if want to use the default style, set 'classic'
plt.rcParams['ytick.right']     = True
plt.rcParams['ytick.labelright']= True
plt.rcParams['ytick.left']      = False
plt.rcParams['ytick.labelleft'] = False
plt.rcParams['font.family']     = 'Arial'
In [3]:
# Show the current working directory (IPython magic);
# all relative ./data paths below are resolved against it.
%pwd
Out[3]:
'C:\\Users\\david\\Documents\\ImageNet'
In [4]:
# Collect the image file paths for each class from ./data.
flowers = glob.glob('./data/flr_*.jpg')
fungus = glob.glob('./data/fgs_*.jpg')
rocks = glob.glob('./data/rck_*.jpg')

# Pixel-art images use a different prefix and the .jpeg extension.
pixel_flowers = glob.glob('./data/pxl_flower_*.jpeg')
pixel_umbrella = glob.glob('./data/pxl_umbrella_*.jpeg')

# Fix: the original format string had no label for the first count
# ("There are 1269, 1792 flower ..."); label every count explicitly.
print("There are %s flower, %s pixel flower, %s fungus, %s rock and %s pixel umbrella pictures"
      % (len(flowers), len(pixel_flowers), len(fungus), len(rocks), len(pixel_umbrella)))
There are 1269, 1792 flower, 856 fungus, 1007 rock and 420 umbrella pictures
In [5]:
# Display 5 randomly chosen images from the selected dataset.
from IPython.display import Image

dataset = flowers  # switch to fungus / rocks to preview another class

for _ in range(5):
    pick = random.randint(0, len(dataset) - 1)
    print("Showing:", dataset[pick])

    imgplot = plt.imshow(mpimg.imread(dataset[pick]))
    plt.show()

#Image(dataset[pick])
Showing: ./data\flr_01500.jpg
Showing: ./data\flr_00328.jpg
Showing: ./data\flr_01052.jpg
Showing: ./data\flr_01821.jpg
Showing: ./data\flr_00812.jpg

Extract the training and testing datasets

In [6]:
# Load the pre-packaged train/test images and labels
# (96x96 RGB, 80/20 split) from the .npz archives.
def _npz_arr(path):
    """Return the single array stored under key 'arr_0' in an .npz file."""
    return np.load(path)['arr_0']

trDatOrg = _npz_arr('flrnonflr-train-imgs96-0.8.npz')
trLblOrg = _npz_arr('flrnonflr-train-labels96-0.8.npz')
tsDatOrg = _npz_arr('flrnonflr-test-imgs96-0.8.npz')
tsLblOrg = _npz_arr('flrnonflr-test-labels96-0.8.npz')
In [7]:
# Report the array shapes so the train/test split can be sanity-checked.
print("For the training and test datasets:")
print("The shapes are %s, %s, %s, %s"
      % (trDatOrg.shape, trLblOrg.shape, tsDatOrg.shape, tsLblOrg.shape))
For the training and test datasets:
The shapes are (4264, 96, 96, 3), (4264,), (1067, 96, 96, 3), (1067,)
In [8]:
# Preview 20 randomly selected test images together with their labels.
data = tsDatOrg
label = tsLblOrg

for _ in range(20):
    idx = random.randint(0, len(data) - 1)
    print("Showing %s index image, It is %s" % (idx, label[idx]))
    imgplot = plt.imshow(data[idx])
    plt.show()
Showing 246 index image, It is 1.0
Showing 1019 index image, It is 0.0
Showing 671 index image, It is 0.0
Showing 865 index image, It is 0.0
Showing 832 index image, It is 0.0
Showing 591 index image, It is 1.0
Showing 6 index image, It is 1.0
Showing 345 index image, It is 1.0
Showing 912 index image, It is 0.0
Showing 708 index image, It is 0.0
Showing 871 index image, It is 0.0
Showing 328 index image, It is 1.0
Showing 861 index image, It is 0.0
Showing 811 index image, It is 0.0
Showing 358 index image, It is 1.0
Showing 452 index image, It is 1.0
Showing 846 index image, It is 0.0
Showing 608 index image, It is 1.0
Showing 146 index image, It is 1.0
Showing 781 index image, It is 0.0
In [9]:
# Normalise pixel values from 0~255 to 0~1 as float32.
trDat = trDatOrg.astype('float32') / 255
tsDat = tsDatOrg.astype('float32') / 255

# Image geometry fed to the network's Input layer.
imgrows = trDat.shape[1]
imgclms = trDat.shape[2]
channel = 3

# The arrays already have shape [samples][rows][cols][channel],
# so no reshape is required before handing them to Keras.

# One-hot encode the labels; the class count is read back from the
# encoded test labels.
trLbl = to_categorical(trLblOrg)
tsLbl = to_categorical(tsLblOrg)
num_classes = tsLbl.shape[1]
In [10]:
# Seed numpy's global RNG so runs are reproducible.
seed = 29
np.random.seed(seed)

# Base name shared by the checkpoint (.hdf5) and log (.csv) files.
modelname = 'FlowerPower'

def createBaselineModel():
    """Build and compile a small CNN baseline (two conv/pool stages + dense head).

    Returns a compiled Keras Model taking (imgrows, imgclms, channel) images
    and producing num_classes softmax outputs.
    """
    img_in = Input(shape=(imgrows, imgclms, channel))
    net = Conv2D(30, (4, 4), activation='relu')(img_in)
    net = MaxPooling2D(pool_size=(2, 2))(net)
    net = Conv2D(50, (4, 4), activation='relu')(net)
    net = MaxPooling2D(pool_size=(2, 2))(net)
    net = Dropout(0.3)(net)
    net = Flatten()(net)
    net = Dense(32, activation='relu')(net)
    net = Dense(num_classes, activation='softmax')(net)

    model = Model(inputs=[img_in], outputs=net)
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    return model

# Shared Adam optimizer for the ResNet models; `lr` is the legacy keyword
# accepted by this TF version (renamed `learning_rate` in later releases).
optmz       = optimizers.Adam(lr=0.001)

def resLyr(inputs,
           numFilters=16,
           kernelSz=3,
           strides=1,
           activation='relu',
           batchNorm=True,
           convFirst=True,
           lyrName=None):
    """One ResNet sub-layer: Conv2D plus optional BatchNorm and activation.

    convFirst=True applies conv -> BN -> activation (ResNet v1 order);
    convFirst=False applies BN -> activation -> conv (pre-activation order).
    lyrName, when given, prefixes the generated layer names.
    """
    def _nm(suffix):
        # Build a layer name only when a prefix was supplied.
        return lyrName + suffix if lyrName else None

    convLyr = Conv2D(numFilters,
                     kernel_size=kernelSz,
                     strides=strides,
                     padding='same',
                     kernel_initializer='he_normal',
                     kernel_regularizer=l2(1e-4),
                     name=_nm('_conv'))
    outputs = inputs
    if convFirst:
        outputs = convLyr(outputs)
        if batchNorm:
            outputs = BatchNormalization(name=_nm('_bn'))(outputs)
        if activation is not None:
            outputs = Activation(activation, name=_nm('_' + activation))(outputs)
    else:
        if batchNorm:
            outputs = BatchNormalization(name=_nm('_bn'))(outputs)
        if activation is not None:
            outputs = Activation(activation, name=_nm('_' + activation))(outputs)
        outputs = convLyr(outputs)
    return outputs


def resBlkV1(inputs,
             numFilters=16,
             numBlocks=3,
             downsampleOnFirst=True,
             names=None):
    """Stack numBlocks ResNet-v1 residual blocks.

    When downsampleOnFirst is True the first block halves the spatial size
    (stride 2) and a 1x1 linear conv projects the shortcut so that the
    add() shapes match.
    """
    x = inputs
    for blk in range(numBlocks):
        tag = str(blk + 1)
        firstDown = downsampleOnFirst and blk == 0
        strides = 2 if firstDown else 1

        def _nm(suffix):
            return names + '_Blk' + tag + suffix if names else None

        y = resLyr(inputs=x, numFilters=numFilters, strides=strides,
                   lyrName=_nm('_Res1'))
        y = resLyr(inputs=y, numFilters=numFilters, activation=None,
                   lyrName=_nm('_Res2'))
        if firstDown:
            # Project the shortcut to the downsampled shape (no BN, no activation).
            x = resLyr(inputs=x, numFilters=numFilters, kernelSz=1,
                       strides=strides, activation=None, batchNorm=False,
                       lyrName=_nm('_lin'))
        x = add([x, y], name=_nm('_add'))
        x = Activation('relu', name=_nm('_relu'))(x)
    return x

def createResNetV1(inputShape=(imgrows, imgclms, channel),
                   numClasses=2):
    """Build and compile a 3-stage ResNet-v1 classifier.

    Stages use 16/32/64 filters with 3 residual blocks each; stages 2 and 3
    downsample.  Compiled with categorical cross-entropy and the shared
    `optmz` Adam optimizer.
    """
    inputs = Input(shape=inputShape)
    net = resLyr(inputs, lyrName='Inpt')
    net = resBlkV1(inputs=net, numFilters=16, numBlocks=3,
                   downsampleOnFirst=False, names='Stg1')
    net = resBlkV1(inputs=net, numFilters=32, numBlocks=3,
                   downsampleOnFirst=True, names='Stg2')
    net = resBlkV1(inputs=net, numFilters=64, numBlocks=3,
                   downsampleOnFirst=True, names='Stg3')
    net = AveragePooling2D(pool_size=8, name='AvgPool')(net)  # 24x24 -> 3x3
    net = Flatten()(net)
    outputs = Dense(numClasses, activation='softmax',
                    kernel_initializer='he_normal')(net)

    model = Model(inputs=inputs, outputs=outputs)
    model.compile(loss='categorical_crossentropy',
                  optimizer=optmz,
                  metrics=['accuracy'])
    return model



# Setup the models.
# `model` is trained below; `modelGo` is a second copy of the identical
# architecture intended to load the best checkpointed weights later for
# final evaluation.
model       = createResNetV1() # This is meant for training
modelGo     = createResNetV1() # This is used for final testing

model.summary()
WARNING:tensorflow:From D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\keras\initializers.py:104: calling VarianceScaling.__init__ (from tensorflow.python.ops.init_ops) with distribution=normal is deprecated and will be removed in a future version.
Instructions for updating:
`normal` is a deprecated alias for `truncated_normal`
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            (None, 96, 96, 3)    0                                            
__________________________________________________________________________________________________
Inpt_conv (Conv2D)              (None, 96, 96, 16)   448         input_1[0][0]                    
__________________________________________________________________________________________________
Inpt_bn (BatchNormalization)    (None, 96, 96, 16)   64          Inpt_conv[0][0]                  
__________________________________________________________________________________________________
Inpt_relu (Activation)          (None, 96, 96, 16)   0           Inpt_bn[0][0]                    
__________________________________________________________________________________________________
Stg1_Blk1_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Inpt_relu[0][0]                  
__________________________________________________________________________________________________
Stg1_Blk1_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk1_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk1_add (Add)             (None, 96, 96, 16)   0           Inpt_relu[0][0]                  
                                                                 Stg1_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk1_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg1_Blk2_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg1_Blk2_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk2_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk2_add (Add)             (None, 96, 96, 16)   0           Stg1_Blk1_relu[0][0]             
                                                                 Stg1_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk2_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg1_Blk3_Res1_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg1_Blk3_Res1_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_Res1_relu (Activation (None, 96, 96, 16)   0           Stg1_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk3_Res2_conv (Conv2D)    (None, 96, 96, 16)   2320        Stg1_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_Res2_bn (BatchNormali (None, 96, 96, 16)   64          Stg1_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg1_Blk3_add (Add)             (None, 96, 96, 16)   0           Stg1_Blk2_relu[0][0]             
                                                                 Stg1_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg1_Blk3_relu (Activation)     (None, 96, 96, 16)   0           Stg1_Blk3_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk1_Res1_conv (Conv2D)    (None, 48, 48, 32)   4640        Stg1_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk1_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk1_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_lin_conv (Conv2D)     (None, 48, 48, 32)   544         Stg1_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk1_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk1_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk1_lin_conv[0][0]         
                                                                 Stg2_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk1_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk2_Res1_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk2_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk2_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk2_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk1_relu[0][0]             
                                                                 Stg2_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk2_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg2_Blk3_Res1_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg2_Blk3_Res1_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_Res1_relu (Activation (None, 48, 48, 32)   0           Stg2_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk3_Res2_conv (Conv2D)    (None, 48, 48, 32)   9248        Stg2_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_Res2_bn (BatchNormali (None, 48, 48, 32)   128         Stg2_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg2_Blk3_add (Add)             (None, 48, 48, 32)   0           Stg2_Blk2_relu[0][0]             
                                                                 Stg2_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg2_Blk3_relu (Activation)     (None, 48, 48, 32)   0           Stg2_Blk3_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk1_Res1_conv (Conv2D)    (None, 24, 24, 64)   18496       Stg2_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk1_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk1_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk1_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk1_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk1_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_lin_conv (Conv2D)     (None, 24, 24, 64)   2112        Stg2_Blk3_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk1_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk1_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk1_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk1_lin_conv[0][0]         
                                                                 Stg3_Blk1_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk1_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk1_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk2_Res1_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk1_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk2_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk2_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk2_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk2_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk2_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk2_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk2_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk1_relu[0][0]             
                                                                 Stg3_Blk2_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk2_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk2_add[0][0]              
__________________________________________________________________________________________________
Stg3_Blk3_Res1_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk2_relu[0][0]             
__________________________________________________________________________________________________
Stg3_Blk3_Res1_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk3_Res1_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_Res1_relu (Activation (None, 24, 24, 64)   0           Stg3_Blk3_Res1_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk3_Res2_conv (Conv2D)    (None, 24, 24, 64)   36928       Stg3_Blk3_Res1_relu[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_Res2_bn (BatchNormali (None, 24, 24, 64)   256         Stg3_Blk3_Res2_conv[0][0]        
__________________________________________________________________________________________________
Stg3_Blk3_add (Add)             (None, 24, 24, 64)   0           Stg3_Blk2_relu[0][0]             
                                                                 Stg3_Blk3_Res2_bn[0][0]          
__________________________________________________________________________________________________
Stg3_Blk3_relu (Activation)     (None, 24, 24, 64)   0           Stg3_Blk3_add[0][0]              
__________________________________________________________________________________________________
AvgPool (AveragePooling2D)      (None, 3, 3, 64)     0           Stg3_Blk3_relu[0][0]             
__________________________________________________________________________________________________
flatten (Flatten)               (None, 576)          0           AvgPool[0][0]                    
__________________________________________________________________________________________________
dense (Dense)                   (None, 2)            1154        flatten[0][0]                    
==================================================================================================
Total params: 274,946
Trainable params: 273,570
Non-trainable params: 1,376
__________________________________________________________________________________________________
In [11]:
# Create checkpoint for the training
# This checkpoint performs model saving when
# an epoch gives highest testing accuracy
# filepath        = modelname + ".hdf5"
# checkpoint      = ModelCheckpoint(filepath, 
#                                   monitor='val_acc', 
#                                   verbose=0, 
#                                   save_best_only=True, 
#                                   mode='max')

#                             # Log the epoch detail into csv
# csv_logger      = CSVLogger(modelname +'.csv')
# callbacks_list  = [checkpoint,csv_logger]

def lrSchedule(epoch):
    """Step-decay learning-rate schedule for LearningRateScheduler.

    Starts at 1e-3 and decays at epochs 31, 41, 51 and 71:
      epoch <= 30 -> 1e-3
      31..40      -> 1e-4
      41..50      -> 1e-5
      51..70      -> 1e-6
      > 70        -> 5e-7
    Prints the chosen rate and returns it.
    """
    base = 1e-3

    if epoch > 70:
        factor = 0.5e-3
    elif epoch > 50:
        factor = 1e-3
    elif epoch > 40:
        factor = 1e-2
    elif epoch > 30:
        factor = 1e-1
    else:
        factor = 1.0

    lr = base * factor
    print('Learning rate: ', lr)
    return lr

# Apply the step-decay schedule above on every epoch.
LRScheduler     = LearningRateScheduler(lrSchedule)

                            # Create checkpoint for the training
                            # This checkpoint performs model saving when
                            # an epoch gives highest testing accuracy
filepath        = modelname + ".hdf5"
# NOTE(review): 'val_acc' is the metric name in this TF version; TF2
# renames it to 'val_accuracy' — confirm if the notebook is upgraded.
checkpoint      = ModelCheckpoint(filepath, 
                                  monitor='val_acc', 
                                  verbose=0, 
                                  save_best_only=True, 
                                  mode='max')

                            # Log the epoch detail into csv
csv_logger      = CSVLogger(modelname +'.csv')
callbacks_list  = [checkpoint, csv_logger, LRScheduler]
#callbacks_list  = [checkpoint, csv_logger]
In [12]:
# Fit the model — this is where the training starts.
# trDat/trLbl and tsDat/tsLbl are prepared in earlier (unseen) cells.
# NOTE(review): the test split doubles as the validation set here, so the
# 'val_acc' monitored by the checkpoint is really test-set accuracy —
# the final "Best accuracy (on testing dataset)" is therefore optimistic.
model.fit(trDat, 
          trLbl, 
          validation_data=(tsDat, tsLbl), 
          epochs=120, 
          batch_size=32,
          callbacks=callbacks_list)

# Alternative: train with on-the-fly augmentation (kept for reference).
# datagen = ImageDataGenerator(width_shift_range=0.1,
#                              height_shift_range=0.1,
#                              rotation_range=30,
#                              horizontal_flip=True,
#                              vertical_flip=False)

# model.fit_generator(datagen.flow(trDat, trLbl, batch_size=64),
#                     validation_data=(tsDat, tsLbl),
#                     epochs=120, 
#                     verbose=1,
#                     steps_per_epoch=len(trDat)/64,
#                     callbacks=callbacks_list)
Train on 4264 samples, validate on 1067 samples
Epoch 1/120
Learning rate:  0.001
4264/4264 [==============================] - 33s 8ms/step - loss: 0.6984 - acc: 0.7500 - val_loss: 0.9059 - val_acc: 0.6279
Epoch 2/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.5501 - acc: 0.8288 - val_loss: 0.5265 - val_acc: 0.8416
Epoch 3/120
Learning rate:  0.001
4264/4264 [==============================] - 25s 6ms/step - loss: 0.4854 - acc: 0.8602 - val_loss: 0.5026 - val_acc: 0.8425
Epoch 4/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.4473 - acc: 0.8748 - val_loss: 0.4615 - val_acc: 0.8641
Epoch 5/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.4134 - acc: 0.8888 - val_loss: 0.5297 - val_acc: 0.8444
Epoch 6/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.3895 - acc: 0.8985 - val_loss: 0.4693 - val_acc: 0.8622
Epoch 7/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.3728 - acc: 0.9029 - val_loss: 1.1499 - val_acc: 0.7329
Epoch 8/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.3352 - acc: 0.9191 - val_loss: 0.5816 - val_acc: 0.8154
Epoch 9/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.2947 - acc: 0.9376 - val_loss: 0.5856 - val_acc: 0.8632
Epoch 10/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.2877 - acc: 0.9407 - val_loss: 0.5765 - val_acc: 0.8604
Epoch 11/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.2630 - acc: 0.9524 - val_loss: 0.5372 - val_acc: 0.8800
Epoch 12/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.2322 - acc: 0.9625 - val_loss: 0.7380 - val_acc: 0.8238
Epoch 13/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.2223 - acc: 0.9679 - val_loss: 0.6175 - val_acc: 0.8632
Epoch 14/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.2260 - acc: 0.9622 - val_loss: 1.8144 - val_acc: 0.7038
Epoch 15/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1994 - acc: 0.9737 - val_loss: 0.5394 - val_acc: 0.8782
Epoch 16/120
Learning rate:  0.001
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1897 - acc: 0.9784 - val_loss: 1.2960 - val_acc: 0.7535
Epoch 17/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1682 - acc: 0.9855 - val_loss: 0.7705 - val_acc: 0.8566
Epoch 18/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1942 - acc: 0.9765 - val_loss: 1.1835 - val_acc: 0.7769
Epoch 19/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1671 - acc: 0.9836 - val_loss: 0.9864 - val_acc: 0.8407
Epoch 20/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1796 - acc: 0.9812 - val_loss: 0.9448 - val_acc: 0.8238
Epoch 21/120
Learning rate:  0.001
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1788 - acc: 0.9796 - val_loss: 1.0601 - val_acc: 0.8013
Epoch 22/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1932 - acc: 0.9735 - val_loss: 0.7742 - val_acc: 0.8529
Epoch 23/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1540 - acc: 0.9883 - val_loss: 0.5842 - val_acc: 0.8688
Epoch 24/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1739 - acc: 0.9815 - val_loss: 0.6278 - val_acc: 0.8707
Epoch 25/120
Learning rate:  0.001
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1400 - acc: 0.9939 - val_loss: 0.7684 - val_acc: 0.8716
Epoch 26/120
Learning rate:  0.001
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1286 - acc: 0.9972 - val_loss: 0.6830 - val_acc: 0.8622
Epoch 27/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1325 - acc: 0.9944 - val_loss: 2.3332 - val_acc: 0.7254
Epoch 28/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.2431 - acc: 0.9599 - val_loss: 1.4348 - val_acc: 0.7554
Epoch 29/120
Learning rate:  0.001
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1790 - acc: 0.9782 - val_loss: 1.1552 - val_acc: 0.7591
Epoch 30/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1587 - acc: 0.9841 - val_loss: 0.5823 - val_acc: 0.8632
Epoch 31/120
Learning rate:  0.001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1426 - acc: 0.9904 - val_loss: 1.4404 - val_acc: 0.7498
Epoch 32/120
Learning rate:  0.0001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1261 - acc: 0.9962 - val_loss: 0.6006 - val_acc: 0.8782
Epoch 33/120
Learning rate:  0.0001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1217 - acc: 0.9981 - val_loss: 0.6006 - val_acc: 0.8791
Epoch 34/120
Learning rate:  0.0001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1179 - acc: 0.9993 - val_loss: 0.6183 - val_acc: 0.8763
Epoch 35/120
Learning rate:  0.0001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1161 - acc: 0.9998 - val_loss: 0.6273 - val_acc: 0.8763
Epoch 36/120
Learning rate:  0.0001
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1159 - acc: 0.9995 - val_loss: 0.6164 - val_acc: 0.8819
Epoch 37/120
Learning rate:  0.0001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1144 - acc: 0.9998 - val_loss: 0.6360 - val_acc: 0.8772
Epoch 38/120
Learning rate:  0.0001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1137 - acc: 1.0000 - val_loss: 0.6526 - val_acc: 0.8791
Epoch 39/120
Learning rate:  0.0001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1128 - acc: 1.0000 - val_loss: 0.6560 - val_acc: 0.8763
Epoch 40/120
Learning rate:  0.0001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1128 - acc: 0.9993 - val_loss: 0.6466 - val_acc: 0.8744
Epoch 41/120
Learning rate:  0.0001
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1114 - acc: 1.0000 - val_loss: 0.6529 - val_acc: 0.8763
Epoch 42/120
Learning rate:  1e-05
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1115 - acc: 0.9998 - val_loss: 0.6562 - val_acc: 0.8754
Epoch 43/120
Learning rate:  1e-05
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1115 - acc: 1.0000 - val_loss: 0.6595 - val_acc: 0.8744
Epoch 44/120
Learning rate:  1e-05
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1108 - acc: 1.0000 - val_loss: 0.6581 - val_acc: 0.8763
Epoch 45/120
Learning rate:  1e-05
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1106 - acc: 1.0000 - val_loss: 0.6594 - val_acc: 0.8763
Epoch 46/120
Learning rate:  1e-05
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1108 - acc: 1.0000 - val_loss: 0.6591 - val_acc: 0.8763
Epoch 47/120
Learning rate:  1e-05
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1107 - acc: 1.0000 - val_loss: 0.6622 - val_acc: 0.8782
Epoch 48/120
Learning rate:  1e-05
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1111 - acc: 0.9998 - val_loss: 0.6639 - val_acc: 0.8772
Epoch 49/120
Learning rate:  1e-05
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1102 - acc: 1.0000 - val_loss: 0.6624 - val_acc: 0.8772
Epoch 50/120
Learning rate:  1e-05
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1101 - acc: 1.0000 - val_loss: 0.6631 - val_acc: 0.8782
Epoch 51/120
Learning rate:  1e-05
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1109 - acc: 0.9998 - val_loss: 0.6627 - val_acc: 0.8763
Epoch 52/120
Learning rate:  1e-06
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1099 - acc: 1.0000 - val_loss: 0.6641 - val_acc: 0.8754
Epoch 53/120
Learning rate:  1e-06
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1111 - acc: 0.9995 - val_loss: 0.6641 - val_acc: 0.8754
Epoch 54/120
Learning rate:  1e-06
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1102 - acc: 1.0000 - val_loss: 0.6643 - val_acc: 0.8744
Epoch 55/120
Learning rate:  1e-06
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1102 - acc: 1.0000 - val_loss: 0.6628 - val_acc: 0.8744
Epoch 56/120
Learning rate:  1e-06
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1099 - acc: 1.0000 - val_loss: 0.6624 - val_acc: 0.8744
Epoch 57/120
Learning rate:  1e-06
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1098 - acc: 1.0000 - val_loss: 0.6632 - val_acc: 0.8744
Epoch 58/120
Learning rate:  1e-06
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1106 - acc: 0.9998 - val_loss: 0.6638 - val_acc: 0.8744
Epoch 59/120
Learning rate:  1e-06
4264/4264 [==============================] - 26s 6ms/step - loss: 0.1109 - acc: 0.9998 - val_loss: 0.6637 - val_acc: 0.8744
Epoch 60/120
Learning rate:  1e-06
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1097 - acc: 1.0000 - val_loss: 0.6629 - val_acc: 0.8744
Epoch 61/120
Learning rate:  1e-06
4264/4264 [==============================] - 25s 6ms/step - loss: 0.1100 - acc: 1.0000 - val_loss: 0.6634 - val_acc: 0.8744
Epoch 62/120
Learning rate:  1e-06
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1103 - acc: 1.0000 - val_loss: 0.6643 - val_acc: 0.8763
Epoch 63/120
Learning rate:  1e-06
4264/4264 [==============================] - 24s 6ms/step - loss: 0.1100 - acc: 1.0000 - val_loss: 0.6618 - val_acc: 0.8763
Epoch 64/120
Learning rate:  1e-06
 480/4264 [==>...........................] - ETA: 20s - loss: 0.1098 - acc: 1.0000
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-12-9c791ddcc8be> in <module>
      6           epochs=120,
      7           batch_size=32,
----> 8           callbacks=callbacks_list)
      9 
     10 # datagen = ImageDataGenerator(width_shift_range=0.1,

D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)
   1361           initial_epoch=initial_epoch,
   1362           steps_per_epoch=steps_per_epoch,
-> 1363           validation_steps=validation_steps)
   1364 
   1365   def evaluate(self,

D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\keras\engine\training_arrays.py in fit_loop(model, inputs, targets, sample_weights, batch_size, epochs, verbose, callbacks, val_inputs, val_targets, val_sample_weights, shuffle, callback_metrics, initial_epoch, steps_per_epoch, validation_steps)
    262           ins_batch[i] = ins_batch[i].toarray()
    263 
--> 264         outs = f(ins_batch)
    265         if not isinstance(outs, list):
    266           outs = [outs]

D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\keras\backend.py in __call__(self, inputs)
   2912       self._make_callable(feed_arrays, feed_symbols, symbol_vals, session)
   2913 
-> 2914     fetched = self._callable_fn(*array_vals)
   2915     self._call_fetch_callbacks(fetched[-len(self._fetches):])
   2916     return fetched[:len(self.outputs)]

D:\DocumentsDDrive\Installed_Files\Anaconda3\envs\tf-gpu\lib\site-packages\tensorflow\python\client\session.py in __call__(self, *args, **kwargs)
   1380           ret = tf_session.TF_SessionRunCallable(
   1381               self._session._session, self._handle, args, status,
-> 1382               run_metadata_ptr)
   1383         if run_metadata:
   1384           proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

KeyboardInterrupt: 
In [13]:
## Training is complete: restore the best checkpoint into a second model
# object (`modelGo`, built in an earlier unseen cell — presumably with the
# same architecture as `model`) and compile it so it can be used for the
# final evaluation.  `filepath` holds the weights saved at the epoch with
# the highest val_acc, not the last epoch's weights.
modelGo.load_weights(filepath)
modelGo.compile(loss='categorical_crossentropy', 
                optimizer='adam', 
                metrics=['accuracy'])
In [14]:
# Make classification on the test dataset
predicts    = modelGo.predict(tsDat)

# Prepare the classification output
# for the classification report
predout     = np.argmax(predicts,axis=1)
testout     = np.argmax(tsLbl,axis=1)
labelname   = ['flower', 'non-flower']
                                            # the labels for the classfication report


testScores  = metrics.accuracy_score(testout,predout)
confusion   = metrics.confusion_matrix(testout,predout)


print("Best accuracy (on testing dataset): %.2f%%" % (testScores*100))
print(metrics.classification_report(testout,predout,target_names=labelname,digits=4))
print(confusion)
Best accuracy (on testing dataset): 88.19%
              precision    recall  f1-score   support

      flower     0.8596    0.8634    0.8615       454
  non-flower     0.8985    0.8956    0.8971       613

    accuracy                         0.8819      1067
   macro avg     0.8791    0.8795    0.8793      1067
weighted avg     0.8820    0.8819    0.8819      1067

[[392  62]
 [ 64 549]]
In [15]:
# NOTE(review): mid-notebook import — move to the imports cell at the top
# so all dependencies are visible in one place.
import pandas as pd

# Read back the per-epoch metrics written by CSVLogger during training
# (columns include 'loss', 'acc', 'val_loss', 'val_acc').
records     = pd.read_csv(modelname +'.csv')
plt.figure()
plt.subplot(211)
plt.plot(records['val_loss'])
plt.plot(records['loss'])
plt.yticks([0, 0.20, 0.30, 0.4, 0.5])
plt.title('Loss value',fontsize=12)

# Hide the x tick labels on the top panel; the bottom panel shares the
# same epoch axis.
ax          = plt.gca()
ax.set_xticklabels([])



plt.subplot(212)
plt.plot(records['val_acc'])
plt.plot(records['acc'])
plt.yticks([0.7, 0.8, 0.9, 1.0])
plt.title('Accuracy',fontsize=12)
plt.show()
In [16]:
# Collect the test-set indices where the predicted class disagrees
# with the ground-truth class.
wrong_ans_index = [i for i in range(len(predout)) if predout[i] != testout[i]]
In [17]:
wrong_ans_index = list(set(wrong_ans_index))
In [ ]:
# Show every image the model misclassified, with its predicted and
# actual class ids (per `labelname` defined earlier).
dataset = tsDatOrg   # un-preprocessed test images, kept for display — TODO confirm it aligns index-for-index with tsDat

for index in wrong_ans_index:
    print("Showing %s index image" %(index))
    print("Predicted as %s but is actually %s" %(predout[index], testout[index]))
    # Bug fix: was `plt.imshow(data[index])` — `data` is a stale name left
    # over from a deleted cell and would raise NameError on a fresh kernel.
    plt.imshow(dataset[index])
    plt.show()
Showing 515 index image
Predicted as 0 but is actually 1
Showing 1033 index image
Predicted as 1 but is actually 0
Showing 522 index image
Predicted as 0 but is actually 1
Showing 12 index image
Predicted as 0 but is actually 1
Showing 529 index image
Predicted as 0 but is actually 1
Showing 22 index image
Predicted as 0 but is actually 1
Showing 1046 index image
Predicted as 1 but is actually 0
Showing 1047 index image
Predicted as 1 but is actually 0
Showing 1050 index image
Predicted as 1 but is actually 0
Showing 539 index image
Predicted as 0 but is actually 1
Showing 540 index image
Predicted as 0 but is actually 1
Showing 29 index image
Predicted as 0 but is actually 1
Showing 1054 index image
Predicted as 1 but is actually 0
Showing 544 index image
Predicted as 0 but is actually 1
Showing 1056 index image
Predicted as 1 but is actually 0
Showing 555 index image
Predicted as 0 but is actually 1
Showing 558 index image
Predicted as 0 but is actually 1
Showing 570 index image
Predicted as 0 but is actually 1
Showing 65 index image
Predicted as 0 but is actually 1
Showing 72 index image
Predicted as 0 but is actually 1
Showing 78 index image
Predicted as 0 but is actually 1
Showing 92 index image
Predicted as 0 but is actually 1
Showing 605 index image
Predicted as 0 but is actually 1
Showing 97 index image
Predicted as 0 but is actually 1
Showing 615 index image
Predicted as 1 but is actually 0
Showing 616 index image
Predicted as 1 but is actually 0
Showing 617 index image
Predicted as 1 but is actually 0
Showing 109 index image
Predicted as 0 but is actually 1
Showing 622 index image
Predicted as 1 but is actually 0
Showing 111 index image
Predicted as 0 but is actually 1
Showing 623 index image
Predicted as 1 but is actually 0
Showing 113 index image
Predicted as 0 but is actually 1
Showing 634 index image
Predicted as 1 but is actually 0
Showing 131 index image
Predicted as 0 but is actually 1
Showing 645 index image
Predicted as 1 but is actually 0
Showing 137 index image
Predicted as 0 but is actually 1
Showing 662 index image
Predicted as 1 but is actually 0
Showing 664 index image
Predicted as 1 but is actually 0
Showing 163 index image
Predicted as 0 but is actually 1
Showing 678 index image
Predicted as 1 but is actually 0
Showing 169 index image
Predicted as 0 but is actually 1
Showing 172 index image
Predicted as 0 but is actually 1
Showing 686 index image
Predicted as 1 but is actually 0
Showing 176 index image
Predicted as 0 but is actually 1
Showing 695 index image
Predicted as 1 but is actually 0
Showing 184 index image
Predicted as 0 but is actually 1
Showing 188 index image
Predicted as 0 but is actually 1
Showing 703 index image
Predicted as 1 but is actually 0
Showing 707 index image
Predicted as 1 but is actually 0
Showing 715 index image
Predicted as 1 but is actually 0
Showing 204 index image
Predicted as 0 but is actually 1
Showing 208 index image
Predicted as 0 but is actually 1
Showing 725 index image
Predicted as 1 but is actually 0
Showing 220 index image
Predicted as 0 but is actually 1
Showing 732 index image
Predicted as 1 but is actually 0
Showing 222 index image
Predicted as 0 but is actually 1
Showing 733 index image
Predicted as 1 but is actually 0
Showing 228 index image
Predicted as 0 but is actually 1
Showing 740 index image
Predicted as 1 but is actually 0
Showing 743 index image
Predicted as 1 but is actually 0
Showing 233 index image
Predicted as 0 but is actually 1
Showing 752 index image
Predicted as 1 but is actually 0
Showing 253 index image
Predicted as 0 but is actually 1
Showing 259 index image
Predicted as 0 but is actually 1
Showing 263 index image
Predicted as 0 but is actually 1
Showing 775 index image
Predicted as 1 but is actually 0
Showing 268 index image
Predicted as 0 but is actually 1
Showing 269 index image
Predicted as 0 but is actually 1
Showing 790 index image
Predicted as 1 but is actually 0
Showing 279 index image
Predicted as 0 but is actually 1
Showing 792 index image
Predicted as 1 but is actually 0
Showing 794 index image
Predicted as 1 but is actually 0
Showing 796 index image
Predicted as 1 but is actually 0
Showing 798 index image
Predicted as 1 but is actually 0
Showing 288 index image
Predicted as 0 but is actually 1
Showing 805 index image
Predicted as 1 but is actually 0
Showing 300 index image
Predicted as 0 but is actually 1
Showing 817 index image
Predicted as 1 but is actually 0
Showing 311 index image
Predicted as 0 but is actually 1
Showing 830 index image
Predicted as 1 but is actually 0
Showing 831 index image
Predicted as 1 but is actually 0
Showing 834 index image
Predicted as 1 but is actually 0
Showing 336 index image
Predicted as 0 but is actually 1
Showing 848 index image
Predicted as 1 but is actually 0
Showing 347 index image
Predicted as 0 but is actually 1
Showing 349 index image
Predicted as 0 but is actually 1
Showing 862 index image
Predicted as 1 but is actually 0
Showing 863 index image
Predicted as 1 but is actually 0
Showing 867 index image
Predicted as 1 but is actually 0
Showing 873 index image
Predicted as 1 but is actually 0
In [ ]: